# Load packages for time-series smoothing and forecasting:
# forecast (ses/HoltWinters forecasts), TTR (SMA), TSA (time-series tools).
library("forecast")
## Registered S3 method overwritten by 'quantmod':
## method from
## as.zoo.data.frame zoo
library("graphics")
library("TTR")
library("TSA")
## Registered S3 methods overwritten by 'TSA':
## method from
## fitted.Arima forecast
## plot.Arima forecast
##
## Attaching package: 'TSA'
## The following objects are masked from 'package:stats':
##
## acf, arima
## The following object is masked from 'package:utils':
##
## tar
# Read daily IHSG prices (100 obs; columns: Periode, Harga).
# NOTE(review): absolute local path — this only runs on the author's machine;
# consider a relative path or a parameter.
data1 <- readxl::read_excel("/Users/fauzanfajari/Downloads/IHSG.xlsx", sheet = "Sheet1")
data1$Periode <- as.integer(data1$Periode)
Data yang digunakan adalah data harga saham IHSG harian dari tanggal 20 Maret 2024 hingga 26 Agustus 2024. Data ini terdiri dari 100 observasi dan 2 variabel, yaitu Periode dan Harga. Data ini akan digunakan untuk melakukan analisis time series dengan menggunakan metode Simple Moving Average (SMA), Double Moving Average (DMA), Exponential Smoothing (SES), Holt-Winters, dan Winter. Data ini akan dibagi menjadi dua bagian, yaitu data latih dan data uji. Data latih terdiri dari 76 observasi dan data uji terdiri dari 24 observasi. Data latih digunakan untuk membuat model dan data uji digunakan untuk menguji model yang telah dibuat.
# Inspect structure and dimensions of the raw data.
str(data1)
## tibble [100 × 2] (S3: tbl_df/tbl/data.frame)
## $ Periode: int [1:100] 1 2 3 4 5 6 7 8 9 10 ...
## $ Harga : num [1:100] 7331 7338 7350 7378 7366 ...
dim(data1)
## [1] 100 2
# Convert the price column to a ts object and summarise it.
data1.ts <- ts(data1$Harga)
summary(data1.ts)
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 6727 7097 7217 7186 7295 7617
ts.plot(data1.ts, xlab="Time Period ", ylab="Harga Saham",
main = "Time Series Plot")
points(data1.ts)
# Train/test split: first 76 observations for training, last 24 for testing.
training_ma <- data1[1:76,]
testing_ma <- data1[77:100,]
train_ma.ts <- ts(training_ma$Harga)
test_ma.ts <- ts(testing_ma$Harga)
Eksplorasi Data
# Exploratory plots: full series, training series, and test series,
# each drawn as a line with point markers overlaid.
plot(data1.ts, main = "Plot semua data", col = "red")
points(data1.ts)
# Explore the training set
plot(train_ma.ts, main = "Plot data latih", col = "blue")
points(train_ma.ts)
# Explore the test set
plot(test_ma.ts, main = "Plot data uji", col = "blue")
points(test_ma.ts)
library(ggplot2)
# Plot the training and test series together with a legend.
# BUG FIX: the original passed `data1` (all 100 observations) to the
# "Data Latih" layer, so the full series was drawn in blue and the test
# region was double-plotted; the training layer must use `training_ma`.
# Also fixed the y-axis label: "Sales" was a copy-paste leftover — the
# variable plotted is Harga (price).
ggplot() +
  geom_line(data = training_ma, aes(x = Periode, y = Harga, col = "Data Latih")) +
  geom_line(data = testing_ma, aes(x = Periode, y = Harga, col = "Data Uji")) +
  labs(x = "Periode Waktu", y = "Harga", color = "Legend") +
  scale_colour_manual(name="Keterangan:", breaks = c("Data Latih", "Data Uji"),
                      values = c("blue", "red")) +
  theme_bw() + theme(legend.position = "bottom",
                     plot.caption = element_text(hjust=0.5, size=12))
SMA
Pemulusan menggunakan metode SMA dilakukan dengan fungsi
SMA(). Dalam hal ini akan dilakukan pemulusan dengan
parameter m=4.
# Simple Moving Average smoothing of the training series with window m = 4;
# the first 3 values are NA until the window fills.
data.sma<-SMA(train_ma.ts, n=4)
data.sma
## Time Series:
## Start = 1
## End = 76
## Frequency = 1
## [1] NA NA NA 7349.348 7357.982 7350.917 7335.582 7292.407
## [9] 7260.237 7224.426 7215.822 7236.277 7218.233 7209.232 7187.336 7137.445
## [17] 7114.698 7109.691 7111.621 7128.615 7119.179 7130.422 7145.338 7135.870
## [25] 7160.532 7155.559 7127.913 7120.755 7111.890 7098.858 7112.913 7152.388
## [33] 7206.882 7252.614 7254.165 7248.087 7212.882 7209.616 7198.164 7151.104
## [41] 7099.683 7045.324 7035.095 7013.477 7014.517 6979.957 6935.516 6912.521
## [49] 6881.322 6864.725 6818.046 6785.853 6778.159 6790.262 6828.846 6867.792
## [57] 6889.372 6911.366 6954.969 7019.199 7074.074 7131.275 7170.603 7199.040
## [65] 7230.498 7248.760 7265.298 7277.057 7296.208 7298.473 7282.786 7263.738
## [73] 7262.111 7266.019 7290.440 7312.850
Data pemulusan pada periode ke-t selanjutnya digunakan sebagai nilai peramalan pada periode ke t+1 sehingga hasil peramalan 1 periode kedepan adalah sebagai berikut.
# One-step-ahead SMA forecast: the smoothed value at period t forecasts
# period t+1, so shift by prepending one NA (length becomes 77).
data.ramal<-c(NA,data.sma)
data.ramal
## [1] NA NA NA NA 7349.348 7357.982 7350.917 7335.582
## [9] 7292.407 7260.237 7224.426 7215.822 7236.277 7218.233 7209.232 7187.336
## [17] 7137.445 7114.698 7109.691 7111.621 7128.615 7119.179 7130.422 7145.338
## [25] 7135.870 7160.532 7155.559 7127.913 7120.755 7111.890 7098.858 7112.913
## [33] 7152.388 7206.882 7252.614 7254.165 7248.087 7212.882 7209.616 7198.164
## [41] 7151.104 7099.683 7045.324 7035.095 7013.477 7014.517 6979.957 6935.516
## [49] 6912.521 6881.322 6864.725 6818.046 6785.853 6778.159 6790.262 6828.846
## [57] 6867.792 6889.372 6911.366 6954.969 7019.199 7074.074 7131.275 7170.603
## [65] 7199.040 7230.498 7248.760 7265.298 7277.057 7296.208 7298.473 7282.786
## [73] 7263.738 7262.111 7266.019 7290.440 7312.850
Selanjutnya akan dilakukan peramalan sejumlah data uji yaitu 24 periode. Pada metode SMA, hasil peramalan 24 periode ke depan akan bernilai sama dengan hasil peramalan 1 periode kedepan. Dalam hal ini akan dilakukan penggabungan data aktual train, data hasil pemulusan dan data hasil ramalan 24 periode kedepan.
# Combine into one 100-row matrix: actual training values (rows 1-76),
# the SMA smoothing, and the forecasts; the last SMA value is carried
# forward as the flat 24-period out-of-sample forecast (rows 77-100).
data.gab<-cbind(aktual=c(train_ma.ts,rep(NA,24)),pemulusan=c(data.sma,rep(NA,24)),ramalan=c(data.ramal,rep(data.ramal[length(data.ramal)],23)))
data.gab
## aktual pemulusan ramalan
## [1,] 7331.128 NA NA
## [2,] 7338.353 NA NA
## [3,] 7350.152 NA NA
## [4,] 7377.760 7349.348 NA
## [5,] 7365.664 7357.982 7349.348
## [6,] 7310.092 7350.917 7357.982
## [7,] 7288.813 7335.582 7350.917
## [8,] 7205.061 7292.407 7335.582
## [9,] 7236.984 7260.237 7292.407
## [10,] 7166.844 7224.426 7260.237
## [11,] 7254.399 7215.822 7224.426
## [12,] 7286.882 7236.277 7215.822
## [13,] 7164.807 7218.233 7236.277
## [14,] 7130.841 7209.232 7218.233
## [15,] 7166.814 7187.336 7209.232
## [16,] 7087.317 7137.445 7187.336
## [17,] 7073.820 7114.698 7137.445
## [18,] 7110.813 7109.691 7114.698
## [19,] 7174.533 7111.621 7109.691
## [20,] 7155.294 7128.615 7111.621
## [21,] 7036.075 7119.179 7128.615
## [22,] 7155.784 7130.422 7119.179
## [23,] 7234.197 7145.338 7130.422
## [24,] 7117.425 7135.870 7145.338
## [25,] 7134.724 7160.532 7135.870
## [26,] 7135.890 7155.559 7160.532
## [27,] 7123.612 7127.913 7155.559
## [28,] 7088.795 7120.755 7127.913
## [29,] 7099.261 7111.890 7120.755
## [30,] 7083.763 7098.858 7111.890
## [31,] 7179.831 7112.913 7098.858
## [32,] 7246.696 7152.388 7112.913
## [33,] 7317.238 7206.882 7152.388
## [34,] 7266.691 7252.614 7206.882
## [35,] 7186.037 7254.165 7252.614
## [36,] 7222.382 7248.087 7254.165
## [37,] 7176.420 7212.882 7248.087
## [38,] 7253.626 7209.616 7212.882
## [39,] 7140.229 7198.164 7209.616
## [40,] 7034.142 7151.104 7198.164
## [41,] 6970.736 7099.683 7151.104
## [42,] 7036.191 7045.324 7099.683
## [43,] 7099.312 7035.095 7045.324
## [44,] 6947.670 7013.477 7035.095
## [45,] 6974.897 7014.517 7013.477
## [46,] 6897.950 6979.957 7014.517
## [47,] 6921.548 6935.516 6979.957
## [48,] 6855.691 6912.521 6935.516
## [49,] 6850.097 6881.322 6912.521
## [50,] 6831.564 6864.725 6881.322
## [51,] 6734.832 6818.046 6864.725
## [52,] 6726.919 6785.853 6818.046
## [53,] 6819.321 6778.159 6785.853
## [54,] 6879.978 6790.262 6778.159
## [55,] 6889.165 6828.846 6790.262
## [56,] 6882.704 6867.792 6828.846
## [57,] 6905.642 6889.372 6867.792
## [58,] 6967.951 6911.366 6889.372
## [59,] 7063.577 6954.969 6911.366
## [60,] 7139.626 7019.199 6954.969
## [61,] 7125.142 7074.074 7019.199
## [62,] 7196.755 7131.275 7074.074
## [63,] 7220.889 7170.603 7131.275
## [64,] 7253.372 7199.040 7170.603
## [65,] 7250.977 7230.498 7199.040
## [66,] 7269.801 7248.760 7230.498
## [67,] 7287.042 7265.298 7248.760
## [68,] 7300.407 7277.057 7265.298
## [69,] 7327.580 7296.208 7277.057
## [70,] 7278.863 7298.473 7296.208
## [71,] 7224.293 7282.786 7298.473
## [72,] 7224.218 7263.738 7282.786
## [73,] 7321.071 7262.111 7263.738
## [74,] 7294.495 7266.019 7262.111
## [75,] 7321.976 7290.440 7266.019
## [76,] 7313.857 7312.850 7290.440
## [77,] NA NA 7312.850
## [78,] NA NA 7312.850
## [79,] NA NA 7312.850
## [80,] NA NA 7312.850
## [81,] NA NA 7312.850
## [82,] NA NA 7312.850
## [83,] NA NA 7312.850
## [84,] NA NA 7312.850
## [85,] NA NA 7312.850
## [86,] NA NA 7312.850
## [87,] NA NA 7312.850
## [88,] NA NA 7312.850
## [89,] NA NA 7312.850
## [90,] NA NA 7312.850
## [91,] NA NA 7312.850
## [92,] NA NA 7312.850
## [93,] NA NA 7312.850
## [94,] NA NA 7312.850
## [95,] NA NA 7312.850
## [96,] NA NA 7312.850
## [97,] NA NA 7312.850
## [98,] NA NA 7312.850
## [99,] NA NA 7312.850
## [100,] NA NA 7312.850
Adapun plot data deret waktu dari hasil peramalan yang dilakukan adalah sebagai berikut.
# Plot actual data (black), SMA smoothing (green) and forecasts (red).
ts.plot(data1.ts, xlab="Time Period ", ylab="Harga", main= "SMA N=4 Data Harga IHSG")
points(data1.ts)
lines(data.gab[,2],col="green",lwd=2)
lines(data.gab[,3],col="red",lwd=2)
# NOTE(review): lty=8 is outside the documented 0-6 integer line types —
# confirm the legend renders with the intended line style.
legend("topleft",c("data aktual","data pemulusan","data peramalan"), lty=8, col=c("black","green","red"), cex=0.5)
Akurasi Data Latih dan Data Uji
Selanjutnya perhitungan akurasi dilakukan dengan ukuran akurasi Sum Squares Error (SSE), Mean Square Error (MSE) dan Mean Absolute Percentage Error (MAPE). Perhitungan akurasi dilakukan baik pada data latih maupun pada data uji.
# Training data accuracy for SMA (m = 4).
# Errors = actual minus one-step forecast; the first 4 forecasts are NA
# (window not yet full), so sums/means start at index 5.
error_train.sma = train_ma.ts-data.ramal[1:length(train_ma.ts)]
SSE_train.sma = sum(error_train.sma[5:length(train_ma.ts)]^2)
MSE_train.sma = mean(error_train.sma[5:length(train_ma.ts)]^2)
MAPE_train.sma = mean(abs((error_train.sma[5:length(train_ma.ts)]/train_ma.ts[5:length(train_ma.ts)])*100))
akurasi_train.sma <- matrix(c(SSE_train.sma, MSE_train.sma, MAPE_train.sma))
row.names(akurasi_train.sma)<- c("SSE", "MSE", "MAPE")
colnames(akurasi_train.sma) <- c("Akurasi m = 4")
akurasi_train.sma
## Akurasi m = 4
## SSE 4.614187e+05
## MSE 6.408592e+03
## MAPE 9.706907e-01
Dalam hal ini nilai MAPE data latih pada metode pemulusan SMA kurang dari 2%, nilai ini dapat dikategorikan sebagai nilai akurasi yang sangat baik. Selanjutnya dilakukan perhitungan nilai MAPE data uji pada metode pemulusan SMA
# Test data accuracy for SMA: compare the 24 held-out observations with
# the flat forecast stored in column 3 (rows 77-100) of data.gab.
error_test.sma = test_ma.ts-data.gab[77:100,3]
SSE_test.sma = sum(error_test.sma^2)
MSE_test.sma = mean(error_test.sma^2)
MAPE_test.sma = mean(abs((error_test.sma/test_ma.ts*100)))
akurasi_test.sma <- matrix(c(SSE_test.sma, MSE_test.sma, MAPE_test.sma))
row.names(akurasi_test.sma)<- c("SSE", "MSE", "MAPE")
colnames(akurasi_test.sma) <- c("Akurasi m = 4")
akurasi_test.sma
## Akurasi m = 4
## SSE 4.911602e+05
## MSE 2.046501e+04
## MAPE 1.554456e+00
Perhitungan akurasi menggunakan data uji menghasilkan nilai MAPE yang kurang dari 10% sehingga nilai akurasi ini dapat dikategorikan sebagai sangat baik.
DMA
Metode pemulusan Double Moving Average (DMA) pada dasarnya mirip dengan SMA. Namun demikian, metode ini lebih cocok digunakan untuk pola data trend. Proses pemulusan dengan rata rata dalam metode ini dilakukan sebanyak 2 kali.
# Double Moving Average (m = 4): smooth the SMA series a second time,
# then derive level (At) and trend (Bt) components.
dma <- SMA(data.sma, n = 4)
At <- 2*data.sma - dma          # level estimate
Bt <- 2/(4-1)*(data.sma - dma)  # trend estimate (2/(m-1) factor)
data.dma<- At+Bt                # one-step-ahead fitted values
data.ramal2<- c(NA, data.dma)
# Forecast the 24 test periods: last level + last trend * horizon.
# IMPROVEMENT: vectorized — the original grew `f` element-by-element
# inside a for-loop; arithmetic over `t` gives the identical vector.
t = 1:24
f = At[length(At)] + Bt[length(Bt)] * t
# Combine actuals, both smoothings, components and forecasts into a
# 100-row matrix; f[1] equals data.ramal2[77], so f[-1] fills rows 78-100.
data.gab2 <- cbind(aktual = c(train_ma.ts,rep(NA,24)), pemulusan1 = c(data.sma,rep(NA,24)),pemulusan2 = c(data.dma, rep(NA,24)),At = c(At, rep(NA,24)), Bt = c(Bt,rep(NA,24)),ramalan = c(data.ramal2, f[-1]))
data.gab2
## aktual pemulusan1 pemulusan2 At Bt ramalan
## [1,] 7331.128 NA NA NA NA NA
## [2,] 7338.353 NA NA NA NA NA
## [3,] 7350.152 NA NA NA NA NA
## [4,] 7377.760 7349.348 NA NA NA NA
## [5,] 7365.664 7357.982 NA NA NA NA
## [6,] 7310.092 7350.917 NA NA NA NA
## [7,] 7288.813 7335.582 7314.124 7322.707 -8.583455 NA
## [8,] 7205.061 7292.407 7222.716 7250.593 -27.876465 7314.124
## [9,] 7236.984 7260.237 7177.657 7210.689 -33.032369 7222.716
## [10,] 7166.844 7224.426 7134.863 7170.688 -35.825073 7177.657
## [11,] 7254.399 7215.822 7161.820 7183.421 -21.600728 7134.863
## [12,] 7286.882 7236.277 7239.755 7238.364 1.391113 7161.820
## [13,] 7164.807 7218.233 7209.139 7212.777 -3.637614 7239.755
## [14,] 7130.841 7209.232 7191.467 7198.573 -7.105957 7209.139
## [15,] 7166.814 7187.336 7144.947 7161.902 -16.955770 7191.467
## [16,] 7087.317 7137.445 7053.083 7086.828 -33.744507 7144.947
## [17,] 7073.820 7114.698 7035.565 7067.218 -31.653198 7053.083
## [18,] 7110.813 7109.691 7063.689 7082.089 -18.400960 7035.565
## [19,] 7174.533 7111.621 7100.383 7104.878 -4.495219 7063.689
## [20,] 7155.294 7128.615 7149.380 7141.074 8.305908 7100.383
## [21,] 7036.075 7119.179 7122.350 7121.081 1.268310 7149.380
## [22,] 7155.784 7130.422 7143.693 7138.384 5.308390 7122.350
## [23,] 7234.197 7145.338 7169.420 7159.787 9.632853 7143.693
## [24,] 7117.425 7135.870 7141.151 7139.038 2.112122 7169.420
## [25,] 7134.724 7160.532 7189.686 7178.024 11.661336 7141.151
## [26,] 7135.890 7155.559 7165.949 7161.793 4.156108 7189.686
## [27,] 7123.612 7127.913 7099.486 7110.857 -11.370585 7165.949
## [28,] 7088.795 7120.755 7086.698 7100.321 -13.623067 7099.486
## [29,] 7099.261 7111.890 7083.324 7094.750 -11.426392 7086.698
## [30,] 7083.763 7098.858 7072.198 7082.862 -10.664022 7083.324
## [31,] 7179.831 7112.913 7115.927 7114.721 1.205872 7072.198
## [32,] 7246.696 7152.388 7208.014 7185.764 22.250590 7115.927
## [33,] 7317.238 7206.882 7313.752 7271.004 42.747945 7208.014
## [34,] 7266.691 7252.614 7371.639 7324.029 47.609884 7313.752
## [35,] 7186.037 7254.165 7316.921 7291.819 25.102091 7371.639
## [36,] 7222.382 7248.087 7260.837 7255.737 5.099914 7316.921
## [37,] 7176.420 7212.882 7164.458 7183.828 -19.369812 7260.837
## [38,] 7253.626 7209.616 7173.664 7188.045 -14.381022 7164.458
## [39,] 7140.229 7198.164 7166.459 7179.141 -12.682169 7173.664
## [40,] 7034.142 7151.104 7081.375 7109.267 -27.891683 7166.459
## [41,] 6970.736 7099.683 6991.419 7034.724 -43.305827 7081.375
## [42,] 7036.191 7045.324 6914.917 6967.080 -52.163045 6991.419
## [43,] 7099.312 7035.095 6955.584 6987.389 -31.804382 6914.917
## [44,] 6947.670 7013.477 6955.281 6978.559 -23.278564 6955.584
## [45,] 6974.897 7014.517 6993.541 7001.931 -8.390747 6955.281
## [46,] 6897.950 6979.957 6928.616 6949.153 -20.536336 6993.541
## [47,] 6921.548 6935.516 6851.598 6885.165 -33.567200 6928.616
## [48,] 6855.691 6912.521 6832.344 6864.415 -32.071086 6851.598
## [49,] 6850.097 6881.322 6804.642 6835.314 -30.671733 6832.344
## [50,] 6831.564 6864.725 6808.398 6830.929 -22.530721 6804.642
## [51,] 6734.832 6818.046 6732.867 6766.939 -34.071655 6808.398
## [52,] 6726.919 6785.853 6699.797 6734.220 -34.422241 6732.867
## [53,] 6819.321 6778.159 6722.264 6744.622 -22.357870 6699.797
## [54,] 6879.978 6790.262 6785.566 6787.445 -1.878438 6722.264
## [55,] 6889.165 6828.846 6883.955 6861.911 22.043783 6785.566
## [56,] 6882.704 6867.792 6953.671 6919.319 34.351481 6883.955
## [57,] 6905.642 6889.372 6964.879 6934.677 30.202800 6953.671
## [58,] 6967.951 6911.366 6973.068 6948.387 24.681132 6964.879
## [59,] 7063.577 6954.969 7036.792 7004.063 32.729329 6973.068
## [60,] 7139.626 7019.199 7144.987 7094.672 50.315125 7036.792
## [61,] 7125.142 7074.074 7214.361 7158.246 56.114827 7144.987
## [62,] 7196.755 7131.275 7275.268 7217.671 57.597209 7214.361
## [63,] 7220.889 7170.603 7290.295 7242.418 47.876811 7275.268
## [64,] 7253.372 7199.040 7291.192 7254.331 36.861084 7290.295
## [65,] 7250.977 7230.498 7309.905 7278.143 31.762878 7291.192
## [66,] 7269.801 7248.760 7309.651 7285.294 24.356405 7309.905
## [67,] 7287.042 7265.298 7314.296 7294.697 19.599386 7309.651
## [68,] 7300.407 7277.057 7313.146 7298.710 14.435710 7314.296
## [69,] 7327.580 7296.208 7336.836 7320.585 16.251343 7313.146
## [70,] 7278.863 7298.473 7322.163 7312.687 9.476135 7336.836
## [71,] 7224.293 7282.786 7273.044 7276.941 -3.896667 7322.163
## [72,] 7224.218 7263.738 7227.800 7242.176 -14.375183 7273.044
## [73,] 7321.071 7262.111 7237.668 7247.445 -9.777323 7227.800
## [74,] 7294.495 7266.019 7261.612 7263.375 -1.762960 7237.668
## [75,] 7321.976 7290.440 7323.545 7310.303 13.241862 7261.612
## [76,] 7313.857 7312.850 7362.841 7342.844 19.996501 7323.545
## [77,] NA NA NA NA NA 7362.841
## [78,] NA NA NA NA NA 7382.837
## [79,] NA NA NA NA NA 7402.834
## [80,] NA NA NA NA NA 7422.830
## [81,] NA NA NA NA NA 7442.827
## [82,] NA NA NA NA NA 7462.823
## [83,] NA NA NA NA NA 7482.820
## [84,] NA NA NA NA NA 7502.816
## [85,] NA NA NA NA NA 7522.813
## [86,] NA NA NA NA NA 7542.809
## [87,] NA NA NA NA NA 7562.806
## [88,] NA NA NA NA NA 7582.802
## [89,] NA NA NA NA NA 7602.799
## [90,] NA NA NA NA NA 7622.795
## [91,] NA NA NA NA NA 7642.792
## [92,] NA NA NA NA NA 7662.788
## [93,] NA NA NA NA NA 7682.785
## [94,] NA NA NA NA NA 7702.781
## [95,] NA NA NA NA NA 7722.778
## [96,] NA NA NA NA NA 7742.774
## [97,] NA NA NA NA NA 7762.771
## [98,] NA NA NA NA NA 7782.767
## [99,] NA NA NA NA NA 7802.764
## [100,] NA NA NA NA NA 7822.760
Hasil pemulusan menggunakan metode DMA divisualisasikan sebagai berikut
# Plot actual data (black), DMA smoothing (green) and forecasts (red).
ts.plot(data1.ts, xlab="Time Period ", ylab="Harga", main= "DMA N=4 Data Harga IHSG")
points(data1.ts)
lines(data.gab2[,3],col="green",lwd=2)
lines(data.gab2[,6],col="red",lwd=2)
# NOTE(review): lty=8 is outside the documented 0-6 integer line types —
# confirm the legend renders with the intended line style.
legend("topleft",c("data aktual","data pemulusan","data peramalan"), lty=8, col=c("black","green","red"), cex=0.8)
Akurasi
# Training data accuracy for DMA.
# The first 7 one-step forecasts are NA (two stacked 4-period windows),
# so sums/means start at index 8.
error_train.dma = train_ma.ts-data.ramal2[1:length(train_ma.ts)]
SSE_train.dma = sum(error_train.dma[8:length(train_ma.ts)]^2)
MSE_train.dma = mean(error_train.dma[8:length(train_ma.ts)]^2)
MAPE_train.dma = mean(abs((error_train.dma[8:length(train_ma.ts)]/train_ma.ts[8:length(train_ma.ts)])*100))
akurasi_train.dma <- matrix(c(SSE_train.dma, MSE_train.dma, MAPE_train.dma))
row.names(akurasi_train.dma)<- c("SSE", "MSE", "MAPE")
colnames(akurasi_train.dma) <- c("Akurasi m = 4")
akurasi_train.dma
## Akurasi m = 4
## SSE 4.044984e+05
## MSE 5.862296e+03
## MAPE 8.574602e-01
# Test data accuracy for DMA: compare the 24 held-out observations with
# the trend-extrapolated forecasts in column 6 (rows 77-100) of data.gab2.
error_test.dma = test_ma.ts-data.gab2[77:100,6]
SSE_test.dma = sum(error_test.dma^2)
MSE_test.dma = mean(error_test.dma^2)
MAPE_test.dma = mean(abs((error_test.dma/test_ma.ts*100)))
akurasi_test.dma <- matrix(c(SSE_test.dma, MSE_test.dma, MAPE_test.dma))
row.names(akurasi_test.dma)<- c("SSE", "MSE", "MAPE")
colnames(akurasi_test.dma) <- c("Akurasi m = 4")
akurasi_test.dma
## Akurasi m = 4
## SSE 1.720799e+06
## MSE 7.169997e+04
## MAPE 3.429478e+00
Perhitungan akurasi menggunakan data latih dan data uji menghasilkan nilai MAPE yang kurang dari 10% sehingga nilai akurasi ini dapat dikategorikan sebagai sangat baik.
Pada data latih, metode DMA lebih baik dibandingkan dengan metode SMA, sedangkan pada data uji, metode SMA lebih baik dibandingkan DMA
# Simple Exponential Smoothing via forecast::ses with fixed alpha = 0.2,
# forecasting h = 24 periods (the test-set length).
ses.1 <- ses(train_ma.ts, h = 24, alpha = 0.2)
plot(ses.1)
ses.1
## Point Forecast Lo 80 Hi 80 Lo 95 Hi 95
## 77 7281.836 7152.154 7411.519 7083.504 7480.168
## 78 7281.836 7149.586 7414.087 7079.576 7484.096
## 79 7281.836 7147.066 7416.606 7075.723 7487.949
## 80 7281.836 7144.593 7419.079 7071.941 7491.731
## 81 7281.836 7142.164 7421.509 7068.226 7495.447
## 82 7281.836 7139.776 7423.896 7064.574 7499.098
## 83 7281.836 7137.428 7426.245 7060.983 7502.690
## 84 7281.836 7135.117 7428.555 7057.449 7506.224
## 85 7281.836 7132.842 7430.830 7053.970 7509.703
## 86 7281.836 7130.602 7433.071 7050.543 7513.129
## 87 7281.836 7128.394 7435.279 7047.166 7516.506
## 88 7281.836 7126.217 7437.455 7043.838 7519.835
## 89 7281.836 7124.071 7439.602 7040.555 7523.118
## 90 7281.836 7121.953 7441.720 7037.316 7526.357
## 91 7281.836 7119.863 7443.810 7034.119 7529.553
## 92 7281.836 7117.799 7445.873 7030.964 7532.709
## 93 7281.836 7115.762 7447.911 7027.847 7535.825
## 94 7281.836 7113.749 7449.924 7024.768 7538.904
## 95 7281.836 7111.759 7451.913 7021.726 7541.947
## 96 7281.836 7109.793 7453.879 7018.719 7544.954
## 97 7281.836 7107.849 7455.824 7015.746 7547.927
## 98 7281.836 7105.926 7457.746 7012.805 7550.867
## 99 7281.836 7104.025 7459.648 7009.897 7553.776
## 100 7281.836 7102.143 7461.530 7007.019 7556.653
# Same SES forecast but with a heavier smoothing weight, alpha = 0.7.
ses.2<- ses(train_ma.ts, h = 24, alpha = 0.7)
plot(ses.2)
ses.2
## Point Forecast Lo 80 Hi 80 Lo 95 Hi 95
## 77 7313.801 7232.394 7395.208 7189.300 7438.302
## 78 7313.801 7214.431 7413.171 7161.828 7465.774
## 79 7313.801 7199.251 7428.351 7138.612 7488.989
## 80 7313.801 7185.860 7441.742 7118.132 7509.470
## 81 7313.801 7173.743 7453.859 7099.601 7528.001
## 82 7313.801 7162.594 7465.008 7082.550 7545.051
## 83 7313.801 7152.213 7475.389 7066.673 7560.929
## 84 7313.801 7142.459 7485.143 7051.756 7575.845
## 85 7313.801 7133.232 7494.370 7037.644 7589.958
## 86 7313.801 7124.453 7503.149 7024.219 7603.383
## 87 7313.801 7116.064 7511.538 7011.389 7616.213
## 88 7313.801 7108.017 7519.585 6999.081 7628.520
## 89 7313.801 7100.273 7527.329 6987.237 7640.364
## 90 7313.801 7092.799 7534.802 6975.808 7651.793
## 91 7313.801 7085.571 7542.031 6964.753 7662.848
## 92 7313.801 7078.564 7549.037 6954.038 7673.564
## 93 7313.801 7071.761 7555.841 6943.632 7683.969
## 94 7313.801 7065.143 7562.459 6933.512 7694.090
## 95 7313.801 7058.697 7568.905 6923.653 7703.949
## 96 7313.801 7052.410 7575.192 6914.038 7713.564
## 97 7313.801 7046.270 7581.331 6904.648 7722.953
## 98 7313.801 7040.269 7587.333 6895.470 7732.132
## 99 7313.801 7034.396 7593.206 6886.488 7741.114
## 100 7313.801 7028.644 7598.958 6877.691 7749.910
# ggplot-style visualisation of the alpha = 0.2 SES forecast together
# with its fitted (in-sample) values.
autoplot(ses.1) +
autolayer(fitted(ses.1), series="Fitted") +
ylab("Harga Saham IHSG") + xlab("Periode")
Pada fungsi ses() , terdapat beberapa argumen yang umum
digunakan, yaitu nilai y , gamma ,
beta , alpha , dan h .
Nilai y adalah nilai data deret waktu,
gamma adalah parameter pemulusan untuk komponen musiman,
beta adalah parameter pemulusan untuk tren, dan
alpha adalah parameter pemulusan untuk stasioner, serta
h adalah banyaknya periode yang akan diramalkan.
Kasus di atas merupakan contoh inisialisasi nilai parameter \(\lambda\) dengan nilai alpha
0,2 dan 0,7 dan banyak periode data yang akan diramalkan adalah sebanyak
24 periode. Selanjutnya akan digunakan fungsi HoltWinters()
dengan nilai inisialisasi parameter dan panjang periode peramalan yang
sama dengan fungsi ses()
# Holt-Winters restricted to simple exponential smoothing:
# beta = FALSE (no trend), gamma = FALSE (no seasonality), alpha = 0.2.
ses1<- HoltWinters(train_ma.ts, gamma = FALSE, beta = FALSE, alpha = 0.2)
plot(ses1)
# Forecast 24 periods ahead (matches the ses() horizon above)
ramalan1<- forecast(ses1, h=24)
ramalan1
## Point Forecast Lo 80 Hi 80 Lo 95 Hi 95
## 77 7281.836 7152.117 7411.556 7083.448 7480.225
## 78 7281.836 7149.548 7414.125 7079.519 7484.154
## 79 7281.836 7147.028 7416.645 7075.665 7488.008
## 80 7281.836 7144.554 7419.118 7071.881 7491.791
## 81 7281.836 7142.124 7421.548 7068.165 7495.507
## 82 7281.836 7139.736 7423.937 7064.512 7499.160
## 83 7281.836 7137.387 7426.286 7060.920 7502.752
## 84 7281.836 7135.076 7428.597 7057.385 7506.287
## 85 7281.836 7132.800 7430.872 7053.905 7509.767
## 86 7281.836 7130.559 7433.114 7050.477 7513.195
## 87 7281.836 7128.350 7435.322 7047.100 7516.573
## 88 7281.836 7126.173 7437.499 7043.770 7519.903
## 89 7281.836 7124.026 7439.647 7040.486 7523.186
## 90 7281.836 7121.908 7441.765 7037.246 7526.426
## 91 7281.836 7119.817 7443.856 7034.049 7529.624
## 92 7281.836 7117.753 7445.920 7030.892 7532.780
## 93 7281.836 7115.714 7447.958 7027.775 7535.898
## 94 7281.836 7113.701 7449.972 7024.695 7538.977
## 95 7281.836 7111.711 7451.962 7021.652 7542.020
## 96 7281.836 7109.744 7453.928 7018.644 7545.029
## 97 7281.836 7107.799 7455.873 7015.670 7548.003
## 98 7281.836 7105.876 7457.796 7012.729 7550.944
## 99 7281.836 7103.974 7459.698 7009.819 7553.853
## 100 7281.836 7102.092 7461.581 7006.941 7556.732
# Same SES-via-HoltWinters model with alpha = 0.7.
ses2<- HoltWinters(train_ma.ts, gamma = FALSE, beta = FALSE, alpha = 0.7)
plot(ses2)
# Forecast 24 periods ahead
ramalan2<- forecast(ses2, h=24)
ramalan2
## Point Forecast Lo 80 Hi 80 Lo 95 Hi 95
## 77 7313.801 7232.393 7395.209 7189.298 7438.303
## 78 7313.801 7214.430 7413.172 7161.826 7465.776
## 79 7313.801 7199.250 7428.352 7138.610 7488.992
## 80 7313.801 7185.858 7441.743 7118.130 7509.472
## 81 7313.801 7173.741 7453.860 7099.598 7528.003
## 82 7313.801 7162.592 7465.009 7082.548 7545.054
## 83 7313.801 7152.211 7475.391 7066.670 7560.931
## 84 7313.801 7142.457 7485.145 7051.753 7575.848
## 85 7313.801 7133.229 7494.372 7037.641 7589.961
## 86 7313.801 7124.451 7503.151 7024.215 7603.386
## 87 7313.801 7116.062 7511.540 7011.385 7616.217
## 88 7313.801 7108.014 7519.587 6999.078 7628.524
## 89 7313.801 7100.270 7527.332 6987.234 7640.368
## 90 7313.801 7092.797 7534.805 6975.804 7651.797
## 91 7313.801 7085.568 7542.033 6964.749 7662.852
## 92 7313.801 7078.562 7549.040 6954.034 7673.568
## 93 7313.801 7071.758 7555.844 6943.628 7683.974
## 94 7313.801 7065.140 7562.462 6933.507 7694.095
## 95 7313.801 7058.694 7568.908 6923.649 7703.953
## 96 7313.801 7052.407 7575.195 6914.033 7713.569
## 97 7313.801 7046.267 7581.334 6904.644 7722.958
## 98 7313.801 7040.266 7587.336 6895.465 7732.137
## 99 7313.801 7034.393 7593.209 6886.483 7741.119
## 100 7313.801 7028.641 7598.961 6877.686 7749.916
# SES with alpha = NULL: ses() estimates the optimal smoothing parameter
# from the training data instead of using a fixed value.
ses.opt <- ses(train_ma.ts, h = 24, alpha = NULL)
plot(ses.opt)
ses.opt
## Point Forecast Lo 80 Hi 80 Lo 95 Hi 95
## 77 7314.06 7235.156 7392.964 7193.387 7434.733
## 78 7314.06 7203.999 7424.122 7145.736 7482.385
## 79 7314.06 7179.891 7448.229 7108.867 7519.254
## 80 7314.06 7159.499 7468.621 7077.680 7550.440
## 81 7314.06 7141.501 7486.620 7050.153 7577.967
## 82 7314.06 7125.210 7502.911 7025.238 7602.882
## 83 7314.06 7110.216 7517.904 7002.308 7625.812
## 84 7314.06 7096.253 7531.867 6980.953 7647.168
## 85 7314.06 7083.132 7544.988 6960.886 7667.234
## 86 7314.06 7070.718 7557.402 6941.900 7686.220
## 87 7314.06 7058.907 7569.214 6923.837 7704.284
## 88 7314.06 7047.619 7580.501 6906.573 7721.547
## 89 7314.06 7036.790 7591.330 6890.012 7738.108
## 90 7314.06 7026.368 7601.752 6874.074 7754.047
## 91 7314.06 7016.311 7611.809 6858.693 7769.427
## 92 7314.06 7006.583 7621.537 6843.815 7784.305
## 93 7314.06 6997.154 7630.967 6829.393 7798.727
## 94 7314.06 6987.996 7640.124 6815.389 7812.731
## 95 7314.06 6979.090 7649.031 6801.767 7826.353
## 96 7314.06 6970.414 7657.707 6788.498 7839.622
## 97 7314.06 6961.951 7666.169 6775.556 7852.564
## 98 7314.06 6953.687 7674.433 6762.918 7865.203
## 99 7314.06 6945.609 7682.511 6750.563 7877.557
## 100 7314.06 6937.704 7690.416 6738.473 7889.647
# HoltWinters with alpha = NULL: the optimal alpha is estimated by
# minimising the in-sample SSE (reported as 0.9724795 below).
sesopt<- HoltWinters(train_ma.ts, gamma = FALSE, beta = FALSE,alpha = NULL)
sesopt
## Holt-Winters exponential smoothing without trend and without seasonal component.
##
## Call:
## HoltWinters(x = train_ma.ts, alpha = NULL, beta = FALSE, gamma = FALSE)
##
## Smoothing parameters:
## alpha: 0.9724795
## beta : FALSE
## gamma: FALSE
##
## Coefficients:
## [,1]
## a 7314.06
plot(sesopt)
# Forecast 24 periods ahead with the optimal-alpha model
ramalanopt<- forecast(sesopt, h=24)
ramalanopt
## Point Forecast Lo 80 Hi 80 Lo 95 Hi 95
## 77 7314.06 7235.157 7392.963 7193.388 7434.732
## 78 7314.06 7203.999 7424.121 7145.736 7482.384
## 79 7314.06 7179.892 7448.229 7108.867 7519.253
## 80 7314.06 7159.500 7468.621 7077.680 7550.440
## 81 7314.06 7141.501 7486.619 7050.153 7577.967
## 82 7314.06 7125.210 7502.910 7025.238 7602.882
## 83 7314.06 7110.216 7517.904 7002.308 7625.812
## 84 7314.06 7096.253 7531.867 6980.953 7647.167
## 85 7314.06 7083.132 7544.988 6960.886 7667.234
## 86 7314.06 7070.718 7557.402 6941.900 7686.220
## 87 7314.06 7058.907 7569.213 6923.837 7704.283
## 88 7314.06 7047.619 7580.501 6906.573 7721.547
## 89 7314.06 7036.790 7591.330 6890.012 7738.108
## 90 7314.06 7026.368 7601.752 6874.074 7754.046
## 91 7314.06 7016.312 7611.809 6858.693 7769.427
## 92 7314.06 7006.583 7621.537 6843.815 7784.305
## 93 7314.06 6997.154 7630.967 6829.393 7798.727
## 94 7314.06 6987.996 7640.124 6815.389 7812.731
## 95 7314.06 6979.090 7649.031 6801.767 7826.353
## 96 7314.06 6970.414 7657.707 6788.498 7839.622
## 97 7314.06 6961.951 7666.169 6775.556 7852.564
## 98 7314.06 6953.687 7674.433 6762.918 7865.202
## 99 7314.06 6945.609 7682.511 6750.563 7877.557
## 100 7314.06 6937.704 7690.416 6738.473 7889.647
Akurasi
Perhitungan akurasi data dapat dilakukan dengan cara langsung maupun manual. Secara langsung, nilai akurasi dapat diambil dari objek yang tersimpan pada hasil SES, yaitu sum of squared errors (SSE). Nilai akurasi lain dapat dihitung pula dari nilai SSE tersebut.
# Training data accuracy (alpha = 0.2), taken directly from the
# HoltWinters object's stored sum of squared errors.
SSE1<-ses1$SSE
MSE1<-ses1$SSE/length(train_ma.ts)
RMSE1<-sqrt(MSE1)
akurasi1 <- matrix(c(SSE1,MSE1,RMSE1))
row.names(akurasi1)<- c("SSE", "MSE", "RMSE")
colnames(akurasi1) <- c("Akurasi lamda=0.2")
akurasi1
## Akurasi lamda=0.2
## SSE 758984.19175
## MSE 9986.63410
## RMSE 99.93315
# Training data accuracy (alpha = 0.7) from the stored SSE.
SSE2<-ses2$SSE
MSE2<-ses2$SSE/length(train_ma.ts)
RMSE2<-sqrt(MSE2)
akurasi2 <- matrix(c(SSE2,MSE2,RMSE2))
row.names(akurasi2)<- c("SSE", "MSE", "RMSE")
colnames(akurasi2) <- c("Akurasi lamda=0.7")
akurasi2
## Akurasi lamda=0.7
## SSE 298609.90106
## MSE 3929.07765
## RMSE 62.68236
# Manual route: extract fitted values and residuals from the forecast
# object; the first residual is NA (no forecast for period 1).
fitted1<-ramalan1$fitted
sisaan1<-ramalan1$residuals
head(sisaan1)
## Time Series:
## Start = 1
## End = 6
## Frequency = 1
## [1] NA 7.225097 17.578906 41.671035 21.241125 -38.579366
# Cross-check: residuals recomputed as actual minus fitted match exactly.
resid1<-training_ma$Harga-ramalan1$fitted
head(resid1)
## Time Series:
## Start = 1
## End = 6
## Frequency = 1
## [1] NA 7.225097 17.578906 41.671035 21.241125 -38.579366
Berdasarkan nilai SSE, MSE, RMSE, dan MAPE di antara kedua parameter, nilai parameter \(\lambda=0,7\) menghasilkan akurasi yang lebih baik dibanding \(\lambda=0,2\) . Hal ini dilihat dari nilai masing-masing ukuran akurasi yang lebih kecil. Berdasarkan nilai MAPE-nya, hasil ini dapat dikategorikan sebagai peramalan sangat baik.
# Training data, manual calculation (NOTE(review): the original header
# said "Data Uji", but these are training residuals — sisaan1).
# Sum squared residuals over periods 2..76 (period 1 is NA).
SSE.1=sum(sisaan1[2:length(train_ma.ts)]^2)
SSE.1
## [1] 758984.2
# NOTE(review): divides by length(train_ma.ts) = 76 although only 75
# residuals enter the sum — confirm this matches the intended definition.
MSE.1 = SSE.1/length(train_ma.ts)
MSE.1
## [1] 9986.634
MAPE.1 = sum(abs(sisaan1[2:length(train_ma.ts)]/train_ma.ts[2:length(train_ma.ts)])*
100)/length(train_ma.ts)
MAPE.1
## [1] 1.14361
akurasi.1 <- matrix(c(SSE.1,MSE.1,MAPE.1))
row.names(akurasi.1)<- c("SSE", "MSE", "MAPE")
colnames(akurasi.1) <- c("Akurasi lamda=0.2")
akurasi.1
## Akurasi lamda=0.2
## SSE 758984.19175
## MSE 9986.63410
## MAPE 1.14361
# Fitted values and residuals for the alpha = 0.7 model.
fitted2<-ramalan2$fitted
sisaan2<-ramalan2$residuals
head(sisaan2)
## Time Series:
## Start = 1
## End = 6
## Frequency = 1
## [1] NA 7.225097 13.966357 31.797818 -2.556358 -56.339173
# Cross-check: residuals recomputed as actual minus fitted match exactly.
resid2<-training_ma$Harga-ramalan2$fitted
head(resid2)
## Time Series:
## Start = 1
## End = 6
## Frequency = 1
## [1] NA 7.225097 13.966357 31.797818 -2.556358 -56.339173
# Manual training accuracy for alpha = 0.7 (periods 2..76; period 1 is NA).
SSE.2=sum(sisaan2[2:length(train_ma.ts)]^2)
SSE.2
## [1] 298609.9
MSE.2 = SSE.2/length(train_ma.ts)
MSE.2
## [1] 3929.078
MAPE.2 = sum(abs(sisaan2[2:length(train_ma.ts)]/train_ma.ts[2:length(train_ma.ts)])*
100)/length(train_ma.ts)
MAPE.2
## [1] 0.7053345
akurasi.2 <- matrix(c(SSE.2,MSE.2,MAPE.2))
row.names(akurasi.2)<- c("SSE", "MSE", "MAPE")
colnames(akurasi.2) <- c("Akurasi lamda=0.7")
akurasi.2
## Akurasi lamda=0.7
## SSE 2.986099e+05
## MSE 3.929078e+03
## MAPE 7.053345e-01
# Test data: SSE and MSE of the three SES/Holt-Winters forecasts against
# the 24 held-out observations.
# BUG FIX: the original divided by length(testing_ma). For a tibble/
# data.frame, length() is the number of COLUMNS (2), not the number of
# test observations (24), so every MSE was inflated by a factor of 12.
# nrow(testing_ma) = 24 gives the correct divisor.
selisih1<-ramalan1$mean-testing_ma$Harga
SSEtesting1<-sum(selisih1^2)
MSEtesting1<-SSEtesting1/nrow(testing_ma)
selisih2<-ramalan2$mean-testing_ma$Harga
SSEtesting2<-sum(selisih2^2)
MSEtesting2<-SSEtesting2/nrow(testing_ma)
selisihopt<-ramalanopt$mean-testing_ma$Harga
SSEtestingopt<-sum(selisihopt^2)
MSEtestingopt<-SSEtestingopt/nrow(testing_ma)
akurasitesting1 <- matrix(c(SSEtesting1,SSEtesting2,SSEtestingopt))
row.names(akurasitesting1)<- c("SSE1", "SSE2", "SSEopt")
akurasitesting1
## [,1]
## SSE1 557262.9
## SSE2 489862.6
## SSEopt 489516.5
akurasitesting2 <- matrix(c(MSEtesting1,MSEtesting2,MSEtestingopt))
row.names(akurasitesting2)<- c("MSE1", "MSE2", "MSEopt")
akurasitesting2
# NOTE(review): the printed values below came from the buggy division by 2;
# after the fix they become SSE/24: ~23219.3, ~20410.9, ~20396.5.
## [,1]
## MSE1 278631.5
## MSE2 244931.3
## MSEopt 244758.2
# forecast::accuracy gives ME/RMSE/MAE/MPE/MAPE/MASE for the optimal-alpha
# model on both the training set and the supplied test values.
accuracy(ramalanopt,testing_ma$Harga)
## ME RMSE MAE MPE MAPE MASE
## Training set -0.2340117 61.15705 49.23781 -0.007003348 0.692053 1.000373
## Test set 27.6875378 142.81639 114.87016 0.340931749 1.556092 2.333837
## ACF1
## Training set 0.003344902
## Test set NA
DES
# Double Exponential Smoothing (Holt): lambda (alpha) = 0.2, trend
# parameter beta = 0.2; gamma = FALSE disables seasonality.
# NOTE(review): the original comment called the trend parameter "gamma",
# but the code sets `beta` — confirm the intended naming.
des.1<- HoltWinters(train_ma.ts, gamma = FALSE, beta = 0.2, alpha = 0.2)
plot(des.1)
# Forecast 24 periods ahead
ramalandes1<- forecast(des.1, h=24)
ramalandes1
## Point Forecast Lo 80 Hi 80 Lo 95 Hi 95
## 77 7377.404 7235.895 7518.913 7160.985 7593.823
## 78 7384.345 7238.818 7529.872 7161.780 7606.910
## 79 7391.286 7240.461 7542.111 7160.619 7621.952
## 80 7398.227 7240.751 7555.702 7157.388 7639.065
## 81 7405.167 7239.657 7570.678 7152.041 7658.294
## 82 7412.108 7237.186 7587.030 7144.588 7679.629
## 83 7419.049 7233.376 7604.722 7135.086 7703.012
## 84 7425.990 7228.282 7623.697 7123.622 7728.357
## 85 7432.931 7221.973 7643.888 7110.299 7755.562
## 86 7439.871 7214.521 7665.222 7095.228 7784.515
## 87 7446.812 7205.998 7687.627 7078.518 7815.106
## 88 7453.753 7196.472 7711.034 7060.275 7847.231
## 89 7460.694 7186.007 7735.381 7040.596 7880.792
## 90 7467.635 7174.660 7760.609 7019.569 7915.700
## 91 7474.576 7162.485 7786.666 6997.275 7951.876
## 92 7481.516 7149.528 7813.504 6973.784 7989.248
## 93 7488.457 7135.831 7841.084 6949.161 8027.753
## 94 7495.398 7121.429 7869.367 6923.462 8067.334
## 95 7502.339 7106.357 7898.321 6896.737 8107.941
## 96 7509.280 7090.643 7927.916 6869.031 8149.529
## 97 7516.220 7074.314 7958.127 6840.383 8192.058
## 98 7523.161 7057.393 7988.929 6810.830 8235.492
## 99 7530.102 7039.901 8020.303 6780.405 8279.799
## 100 7537.043 7021.858 8052.228 6749.135 8324.950
# DES (Holt) scenario 2: alpha = 0.6, beta = 0.3 — heavier weighting of
# recent observations than scenario 1.
des.2 <- HoltWinters(train_ma.ts, alpha = 0.6, beta = 0.3, gamma = FALSE)
plot(des.2)
# 24-step-ahead forecast for comparison against the test set.
ramalandes2 <- forecast(des.2, h = 24)
ramalandes2
## Point Forecast Lo 80 Hi 80 Lo 95 Hi 95
## 77 7326.182 7241.035 7411.329 7195.960 7456.404
## 78 7335.114 7227.128 7443.100 7169.963 7500.264
## 79 7344.045 7208.610 7479.481 7136.915 7551.176
## 80 7352.977 7186.349 7519.605 7098.142 7607.813
## 81 7361.909 7160.918 7562.900 7054.519 7669.298
## 82 7370.840 7132.702 7608.979 7006.639 7735.042
## 83 7379.772 7101.972 7657.572 6954.914 7804.630
## 84 7388.704 7068.930 7708.477 6899.652 7877.755
## 85 7397.635 7033.730 7761.541 6841.090 7954.180
## 86 7406.567 6996.496 7816.638 6779.418 8033.717
## 87 7415.499 6957.330 7873.668 6714.790 8116.208
## 88 7424.430 6916.318 7932.543 6647.340 8201.521
## 89 7433.362 6873.535 7993.189 6577.181 8289.544
## 90 7442.294 6829.046 8055.542 6504.412 8380.175
## 91 7451.225 6782.908 8119.543 6429.122 8473.329
## 92 7460.157 6735.173 8185.141 6351.390 8568.924
## 93 7469.089 6685.888 8252.289 6271.287 8666.890
## 94 7478.020 6635.096 8320.945 6188.878 8767.162
## 95 7486.952 6582.834 8391.070 6104.223 8869.681
## 96 7495.884 6529.140 8462.627 6017.377 8974.390
## 97 7504.815 6474.047 8535.584 5928.391 9081.240
## 98 7513.747 6417.584 8609.910 5837.311 9190.183
## 99 7522.679 6359.782 8685.575 5744.182 9301.175
## 100 7531.610 6300.668 8762.553 5649.046 9414.174
# Visually evaluate the prediction: the full series with the in-sample fit
# (dashed blue) and the 24-step forecast (red) overlaid.
plot(data1.ts)
lines(des.1$fitted[, 1], lty = 2, col = "blue")
lines(ramalandes1$mean, col = "red")
# Optimal lambda and gamma: HoltWinters() chooses alpha and beta by
# minimising the in-sample squared one-step prediction error.
# (The call is kept verbatim: the printed object below echoes it.)
des.opt<- HoltWinters(train_ma.ts, gamma = FALSE)
des.opt
## Holt-Winters exponential smoothing with trend and without seasonal component.
##
## Call:
## HoltWinters(x = train_ma.ts, gamma = FALSE)
##
## Smoothing parameters:
## alpha: 0.99091
## beta : 0.0009956327
## gamma: FALSE
##
## Coefficients:
## [,1]
## a 7313.99010
## b 6.70205
plot(des.opt)
# Forecast 24 periods ahead using the optimised parameters.
ramalandesopt <- forecast(des.opt, h = 24)
ramalandesopt
## Point Forecast Lo 80 Hi 80 Lo 95 Hi 95
## 77 7320.692 7241.179 7400.205 7199.088 7442.296
## 78 7327.394 7215.401 7439.388 7156.115 7498.673
## 79 7334.096 7197.074 7471.119 7124.539 7543.654
## 80 7340.798 7182.621 7498.976 7098.887 7582.710
## 81 7347.500 7170.646 7524.355 7077.025 7617.976
## 82 7354.202 7160.431 7547.974 7057.854 7650.550
## 83 7360.904 7151.549 7570.260 7040.723 7681.086
## 84 7367.606 7143.722 7591.491 7025.204 7710.009
## 85 7374.309 7136.755 7611.862 7011.002 7737.615
## 86 7381.011 7130.509 7631.512 6997.901 7764.120
## 87 7387.713 7124.876 7650.550 6985.738 7789.687
## 88 7394.415 7119.773 7669.056 6974.387 7814.442
## 89 7401.117 7115.136 7687.097 6963.747 7838.486
## 90 7407.819 7110.911 7704.727 6953.737 7861.901
## 91 7414.521 7107.053 7721.989 6944.289 7884.753
## 92 7421.223 7103.526 7738.920 6935.347 7907.098
## 93 7427.925 7100.299 7755.551 6926.865 7928.985
## 94 7434.627 7097.346 7771.908 6918.800 7950.453
## 95 7441.329 7094.644 7788.014 6911.120 7971.539
## 96 7448.031 7092.172 7803.890 6903.792 7992.271
## 97 7454.733 7089.914 7819.553 6896.790 8012.676
## 98 7461.435 7087.853 7835.017 6890.091 8032.779
## 99 7468.137 7085.977 7850.297 6883.674 8052.600
## 100 7474.839 7084.273 7865.405 6877.520 8072.158
Hasil akurasi dari data latih didapatkan skenario 2 dengan lamda=0.6 dan gamma=0.3 memiliki hasil yang lebih baik. Namun untuk kedua skenario dapat dikategorikan peramalan sangat baik berdasarkan nilai MAPE-nya.
Akurasi
#Data Latih
# In-sample accuracy of DES scenario 1 (alpha = 0.2, beta = 0.2).
ssedes.train1 <- des.1$SSE
msedes.train1 <- ssedes.train1/length(train_ma.ts)
sisaandes1 <- ramalandes1$residuals
head(sisaandes1)
## Time Series:
## Start = 1
## End = 6
## Frequency = 1
## [1] NA NA 4.573731 23.858850 -1.371024 -64.976644
# The first two residuals are NA (level and trend need two observations),
# so the sum starts at t = 3 while the divisor keeps the full length n,
# per the report's convention. NOTE(review): this slightly understates MAPE.
mapedes.train1 <- sum(abs(sisaandes1[3:length(train_ma.ts)]/train_ma.ts[3:length(train_ma.ts)])*100)/length(train_ma.ts)
akurasides.1 <- matrix(
  c(ssedes.train1, msedes.train1, mapedes.train1),
  dimnames = list(c("SSE", "MSE", "MAPE"), "Akurasi lamda=0.2 dan gamma=0.2")
)
akurasides.1
## Akurasi lamda=0.2 dan gamma=0.2
## SSE 8.900564e+05
## MSE 1.171127e+04
## MAPE 1.196636e+00
# In-sample accuracy of DES scenario 2 (alpha = 0.6, beta = 0.3).
ssedes.train2 <- des.2$SSE
msedes.train2 <- ssedes.train2/length(train_ma.ts)
sisaandes2 <- ramalandes2$residuals
head(sisaandes2)
## Time Series:
## Start = 1
## End = 6
## Frequency = 1
## [1] NA NA 4.573731 21.389035 -15.438484 -70.867127
# Same convention as scenario 1: sum from t = 3, divisor = full length n.
mapedes.train2 <- sum(abs(sisaandes2[3:length(train_ma.ts)]/train_ma.ts[3:length(train_ma.ts)])*100)/length(train_ma.ts)
akurasides.2 <- matrix(
  c(ssedes.train2, msedes.train2, mapedes.train2),
  dimnames = list(c("SSE", "MSE", "MAPE"), "Akurasi lamda=0.6 dan gamma=0.3")
)
akurasides.2
## Akurasi lamda=0.6 dan gamma=0.3
## SSE 3.222514e+05
## MSE 4.240151e+03
## MAPE 7.276697e-01
#Data Uji
# Out-of-sample errors of DES scenario 1 over the 24 test observations.
selisihdes1 <- ramalandes1$mean - testing_ma$Harga
selisihdes1
## Time Series:
## Start = 77
## End = 100
## Frequency = 1
## [1] 114.64544 144.06818 103.11884 109.32967 163.30368 156.34605 93.06420
## [8] 117.86682 373.27784 310.65663 234.68142 258.63199 203.69780 170.00970
## [15] 117.93732 45.47725 78.95614 63.30808 35.50767 -24.70533 -38.37244
## [22] 34.48536 -14.19590 -79.67502
# nrow(testing_ma) == length(testing_ma$Harga) == 24 test observations.
SSEtestingdes1 <- sum(selisihdes1^2)
MSEtestingdes1 <- SSEtestingdes1/nrow(testing_ma)
MAPEtestingdes1 <- sum(abs(selisihdes1/testing_ma$Harga)*100)/nrow(testing_ma)
# Out-of-sample errors of DES scenario 2 over the 24 test observations.
selisihdes2 <- ramalandes2$mean - testing_ma$Harga
selisihdes2
## Time Series:
## Start = 77
## End = 100
## Frequency = 1
## [1] 63.42333 94.83693 55.87845 64.08013 120.04499 115.07822 53.78722
## [8] 80.58070 337.98258 277.35222 203.36787 229.30930 176.36596 144.66871
## [15] 94.58719 24.11797 59.58772 45.93051 20.12096 -38.10119 -49.77744
## [22] 25.07121 -21.61920 -85.10746
SSEtestingdes2 <- sum(selisihdes2^2)
MSEtestingdes2 <- SSEtestingdes2/nrow(testing_ma)
MAPEtestingdes2 <- sum(abs(selisihdes2/testing_ma$Harga)*100)/nrow(testing_ma)
selisihdesopt<-ramalandesopt$mean-testing_ma$Harga
selisihdesopt
## Time Series:
## Start = 77
## End = 100
## Frequency = 1
## [1] 57.933358 87.117342 45.929254 51.901323 105.636575 98.440188
## [7] 34.919581 59.483447 314.655711 251.795749 175.581784 199.293598
## [13] 144.120648 110.193791 57.882657 -14.816173 18.423963 2.537145
## [19] -25.502016 -85.953774 -99.859635 -27.240593 -76.160615 -141.878486
SSEtestingdesopt<-sum(selisihdesopt^2)
MSEtestingdesopt<-SSEtestingdesopt/length(testing_ma$Harga)
MAPEtestingdesopt<-sum(abs(selisihdesopt/testing_ma$Harga)*100)/length(testing_ma$Harga)
# Collect the three DES test-set accuracy columns into one 3x3 table.
akurasitestingdes <- matrix(
  c(SSEtestingdes1, MSEtestingdes1, MAPEtestingdes1,
    SSEtestingdes2, MSEtestingdes2, MAPEtestingdes2,
    SSEtestingdesopt, MSEtestingdesopt, MAPEtestingdesopt),
  nrow = 3, ncol = 3,
  dimnames = list(c("SSE", "MSE", "MAPE"), c("des ske1", "des ske2", "des opt"))
)
akurasitestingdes
## des ske1 des ske2 des opt
## SSE 5.956591e+05 4.222793e+05 3.558240e+05
## MSE 2.481913e+04 1.759497e+04 1.482600e+04
## MAPE 1.772686e+00 1.425499e+00 1.310315e+00
Perbandingan
# Side-by-side MSE comparison of the SES and DES scenarios on the test set.
MSEfull <- matrix(
  c(MSEtesting1, MSEtesting2, MSEtestingopt,
    MSEtestingdes1, MSEtestingdes2, MSEtestingdesopt),
  nrow = 3, ncol = 2,
  dimnames = list(c("ske 1", "ske 2", "ske opt"), c("ses", "des"))
)
MSEfull
## ses des
## ske 1 278631.5 24819.13
## ske 2 244931.3 17594.97
## ske opt 244758.2 14826.00
Kedua metode dapat dibandingkan dengan menggunakan ukuran akurasi yang sama. Contoh di atas adalah perbandingan kedua metode dengan ukuran akurasi MSE. Hasilnya didapatkan metode DES lebih baik dibandingkan metode SES dilihat dari MSE yang lebih kecil nilainya. (Catatan: nilai MSE SES pada tabel di atas dihitung dengan pembagi length(testing_ma) = 2 — jumlah kolom data frame — bukan 24 amatan uji, sehingga nilainya terinflasi; setelah pembagi dikoreksi, metode DES optimum tetap memberikan MSE terkecil.)
# Re-split the series keeping only the price column; frequency = 13 imposes
# a 13-period cycle so the seasonal Winter (Holt-Winters) models can be fit.
training <- data1[1:76, 2]
testing <- data1[77:100, 2]
training.ts <- ts(training, frequency = 13)
testing.ts <- ts(testing, frequency = 13)
# Quick visual check of the full series and both partitions.
plot(data1.ts, col = "red", main = "Plot semua data")
points(data1.ts)
plot(training.ts, col = "blue", main = "Plot data latih")
points(training.ts)
plot(testing.ts, col = "green", main = "Plot data uji")
points(testing.ts)
Winter Additive
# Additive Holt-Winters smoothing with fixed parameters
# alpha = 0.2, beta = 0.1, gamma = 0.1.
winter1 <- HoltWinters(training.ts, alpha = 0.2, beta = 0.1, gamma = 0.1,
                       seasonal = "additive")
winter1$fitted
## Time Series:
## Start = c(2, 1)
## End = c(6, 11)
## Frequency = 13
## xhat level trend season
## 2.000000 7241.381 7278.895 -11.7816438 -25.731954080
## 2.076923 7254.253 7245.005 -13.9924540 23.240101689
## 2.153846 7147.773 7213.525 -15.7412314 -50.010837003
## 2.230769 7100.053 7185.693 -16.9503512 -68.688872541
## 2.307692 7124.861 7163.495 -17.4750208 -21.159237695
## 2.384615 7179.720 7143.211 -17.7559855 54.265416920
## 2.461538 7135.052 7124.417 -17.8597263 28.494232805
## 2.538462 7044.544 7110.606 -17.4548859 -48.607329541
## 2.615385 7070.344 7091.457 -17.6242596 -3.489165080
## 2.692308 7021.595 7090.921 -15.9154570 -53.410740234
## 2.769231 7163.387 7117.526 -11.6634223 57.523943305
## 2.846154 7193.698 7096.670 -12.5826592 109.610782151
## 2.923077 7056.494 7072.293 -13.7621460 -2.036340695
## 3.000000 7027.660 7074.410 -12.1742298 -34.575195086
## 3.076923 7087.416 7081.426 -10.2552015 16.244992345
## 3.153846 7006.372 7071.447 -10.2276165 -54.847316475
## 3.230769 7000.639 7079.797 -8.3698238 -70.787550858
## 3.307692 7059.061 7088.052 -6.7073494 -22.283096352
## 3.384615 7155.057 7105.498 -4.2919546 53.850453663
## 3.461538 7147.189 7119.534 -2.4591757 30.113594391
## 3.538462 7102.742 7151.085 0.9418079 -49.284824414
## 3.615385 7192.383 7184.816 4.2207895 3.346045434
## 3.692308 7155.459 7187.768 4.0938655 -36.402601493
## 3.769231 7264.526 7205.246 5.4323164 53.846995638
## 3.846154 7301.621 7193.058 3.6702006 104.892834926
## 3.923077 7194.154 7187.129 2.7103082 4.315324314
## 4.000000 7153.787 7179.054 1.6317989 -26.899082027
## 4.076923 7172.351 7156.757 -0.7610946 16.355332397
## 4.153846 7063.463 7115.673 -4.7934009 -47.416145768
## 4.230769 7035.948 7105.425 -5.3388467 -64.137653318
## 4.307692 7096.066 7112.759 -4.0715745 -12.621517046
## 4.384615 7133.150 7079.008 -7.0394897 61.181569154
## 4.461538 7073.831 7040.318 -10.2045528 43.717528943
## 4.538462 6945.046 6994.937 -13.7221669 -36.168897901
## 4.615385 6965.162 6976.515 -14.1921332 2.838349173
## 4.692308 6892.999 6940.429 -16.3815472 -31.048797826
## 4.769231 6945.026 6915.467 -17.2395796 46.798532369
## 4.846154 6957.080 6875.535 -19.5088245 101.053265522
## 4.923077 6787.624 6811.577 -23.9537773 0.001287141
## 5.000000 6713.843 6775.482 -25.1678861 -36.470656292
## 5.076923 6748.577 6771.410 -23.0583395 0.226107402
## 5.153846 6704.603 6774.631 -20.4303262 -49.597929201
## 5.230769 6715.306 6791.113 -16.7390880 -59.068564201
## 5.307692 6769.970 6807.854 -13.3911217 -24.493177898
## 5.384615 6859.441 6821.597 -10.6776743 48.521316831
## 5.461538 6853.761 6832.622 -8.5074711 29.647072282
## 5.538462 6823.718 6866.077 -4.3111547 -38.048763169
## 5.615385 6921.036 6924.948 2.0070149 -5.919306869
## 5.692308 6939.384 6967.776 6.0891433 -34.480927119
## 5.769231 7074.298 7025.339 11.2365514 37.721552644
## 5.846154 7163.336 7065.894 14.1683829 83.273454330
## 5.923077 7109.184 7098.070 15.9691005 -4.855148238
## 6.000000 7133.170 7142.398 18.8049643 -28.032469749
## 6.076923 7220.804 7188.529 21.5375771 10.738160781
## 6.153846 7211.343 7223.314 22.8623274 -34.832976516
## 6.230769 7242.956 7263.989 24.6436085 -45.676699174
## 6.307692 7318.254 7305.557 26.3360927 -13.639388001
## 6.384615 7406.766 7324.015 25.5482663 57.202129670
## 6.461538 7381.400 7313.069 21.8988130 46.432337881
## 6.538462 7309.510 7303.531 18.7551660 -12.776085161
## 6.615385 7353.994 7324.599 18.9863741 10.409206885
## 6.692308 7335.590 7331.685 17.7963936 -13.891294519
## 6.769231 7413.732 7346.759 17.5241105 49.448878389
# BUG FIX: $fitted has columns xhat, level, trend, season; column 2 is the
# level component, not the smoothed one-step prediction the name implies.
# Select the "xhat" column by name.
xhat1 <- winter1$fitted[, "xhat"]
# Additive Holt-Winters with all three smoothing parameters optimised by
# minimising in-sample squared prediction error (NULL requests optimisation).
# The call is kept verbatim: the printed object below echoes it.
winter1.opt<- HoltWinters(training.ts, alpha= NULL, beta = NULL, gamma = NULL, seasonal = "additive")
winter1.opt
## Holt-Winters exponential smoothing with trend and additive seasonal component.
##
## Call:
## HoltWinters(x = training.ts, alpha = NULL, beta = NULL, gamma = NULL, seasonal = "additive")
##
## Smoothing parameters:
## alpha: 0.6729542
## beta : 0.04080533
## gamma: 1
##
## Coefficients:
## [,1]
## a 7305.508356
## b 6.100896
## s1 28.084259
## s2 19.991446
## s3 17.085260
## s4 8.721696
## s5 4.842757
## s6 1.568704
## s7 -35.710342
## s8 -4.876252
## s9 46.167244
## s10 82.335957
## s11 12.285380
## s12 24.133614
## s13 8.348578
# Inspect the optimised fit's decomposition: xhat / level / trend / season.
winter1.opt$fitted
## Time Series:
## Start = c(2, 1)
## End = c(6, 11)
## Frequency = 13
## xhat level trend season
## 2.000000 7241.381 7278.895 -11.7816438 -25.73195408
## 2.076923 7201.148 7192.725 -14.8170993 23.24010169
## 2.153846 7089.032 7154.803 -15.7599046 -50.01083700
## 2.230769 7053.393 7137.889 -15.8069958 -68.68887254
## 2.307692 7099.423 7135.828 -15.2460649 -21.15923770
## 2.384615 7167.579 7128.247 -14.9332888 54.26541692
## 2.461538 7131.745 7117.994 -14.7423321 28.49423280
## 2.538462 7056.395 7119.098 -14.0956868 -48.60732954
## 2.615385 7073.185 7091.328 -14.6536780 -3.48916508
## 2.692308 7066.463 7132.260 -12.3855021 -53.41074023
## 2.769231 7282.495 7232.751 -7.7795259 57.52394330
## 2.846154 7211.185 7113.887 -12.3123857 109.61078215
## 2.923077 7033.671 7050.120 -14.4120084 -2.03634070
## 3.000000 7031.007 7104.496 -11.6050635 -61.88376339
## 3.076923 7158.159 7155.210 -9.0621345 12.01143554
## 3.153846 7037.930 7099.469 -10.9668799 -50.57168549
## 3.230769 7058.484 7129.775 -9.2827211 -62.00827161
## 3.307692 7111.481 7137.504 -8.5885476 -17.43412239
## 3.384615 7224.740 7174.912 -6.7116548 56.53968172
## 3.461538 7213.062 7182.975 -6.1087377 36.19567812
## 3.538462 7188.471 7246.972 -3.2480693 -55.25291815
## 3.615385 7318.787 7296.362 -1.1001413 23.52446070
## 3.692308 7202.628 7205.928 -4.7454598 1.44572948
## 3.769231 7213.811 7214.476 -4.2030199 3.53827884
## 3.846154 7264.485 7185.110 -5.2297830 84.60459886
## 3.923077 7198.439 7172.573 -5.5279744 31.39394116
## 4.000000 7089.148 7127.872 -7.1264212 -31.59786879
## 4.076923 7064.419 7083.729 -8.6368908 -10.67378955
## 4.153846 6970.325 7012.048 -11.2094342 -30.51361271
## 4.230769 6982.022 7045.163 -9.4007533 -53.74077185
## 4.307692 7113.433 7114.694 -6.1799528 4.91938187
## 4.384615 7049.951 6996.963 -10.7318258 63.72033212
## 4.461538 6993.196 6935.723 -12.7928225 70.26579883
## 4.538462 6813.754 6858.834 -15.4082772 -29.67142542
## 4.615385 6883.627 6915.966 -12.4482552 -19.89072206
## 4.692308 6879.409 6884.718 -13.2153803 7.90610543
## 4.769231 6829.067 6851.777 -14.0202787 -8.69031240
## 4.846154 6906.539 6839.438 -13.9517031 81.05318466
## 4.923077 6703.625 6709.935 -18.6667977 12.35668581
## 5.000000 6639.330 6706.944 -18.0271374 -49.58732933
## 5.076923 6755.646 6810.043 -13.0845560 -41.31238732
## 5.153846 6861.985 6880.628 -9.6703784 -8.97250048
## 5.230769 6864.943 6889.248 -8.9240169 -15.38153228
## 5.307692 6834.548 6892.277 -8.4362933 -49.29272630
## 5.384615 6964.374 6931.684 -6.4840385 39.17417795
## 5.461538 6960.337 6927.607 -6.3858062 39.11613400
## 5.538462 6992.728 6990.697 -3.5508302 5.58198292
## 5.615385 7057.458 7086.002 0.4830045 -29.02706471
## 5.692308 7132.695 7132.033 2.3416291 -1.68011028
## 5.769231 7173.712 7177.484 4.1007282 -7.87358821
## 5.846154 7243.627 7213.334 5.3962314 24.89713189
## 5.923077 7250.927 7225.288 5.6638358 19.97494206
## 6.000000 7245.929 7230.986 5.6652216 9.27805614
## 6.076923 7258.386 7252.716 6.3207467 -0.65005636
## 6.153846 7285.344 7278.320 7.1076378 -0.08345001
## 6.230769 7293.513 7295.564 7.5212625 -9.57281895
## 6.307692 7308.426 7326.011 8.4567509 -26.04167117
## 6.384615 7362.562 7314.573 7.6449293 40.34410949
## 6.461538 7305.898 7229.169 3.8480405 72.88026216
## 6.538462 7233.280 7178.051 1.6051049 53.62433533
## 6.615385 7235.860 7238.735 4.0158515 -6.89113111
## 6.692308 7307.106 7282.210 5.6259908 19.27048984
## 6.769231 7311.432 7297.842 6.0343188 7.55565605
# BUG FIX: column 2 of $fitted is the level, not the prediction; select
# the "xhat" column by name.
xhat1.opt <- winter1.opt$fitted[, "xhat"]
Peramalan
#Forecast
# 24-step-ahead predictions from both additive fits.
forecast1 <- predict(winter1, n.ahead = 24)
forecast1.opt <- predict(winter1.opt, n.ahead = 24)
#Visualisasi
# Training series in black; fixed-parameter fit/forecast in red,
# optimised fit/forecast in blue.
plot(training.ts, main = "Winter 0.2;0.1;0.1", type = "l", col = "black",
     xlim = c(1, 25), pch = 12)
lines(xhat1, type = "l", col = "red")
lines(xhat1.opt, type = "l", col = "blue")
lines(forecast1, type = "l", col = "red")
lines(forecast1.opt, type = "l", col = "blue")
legend("topleft", c("Actual Data", expression(paste(winter1)),
                    expression(paste(winter1.opt))), cex = 0.5,
       col = c("black", "red", "blue"), lty = 1)
Akurasi
#Akurasi data training
# In-sample accuracy of the fixed-parameter additive model.
SSE1 <- winter1$SSE
MSE1 <- SSE1/length(training.ts)
RMSE1 <- sqrt(MSE1)
akurasi1 <- matrix(c(SSE1, MSE1, RMSE1),
                   dimnames = list(c("SSE", "MSE", "RMSE"), "Akurasi"))
akurasi1
## Akurasi
## SSE 961993.3350
## MSE 12657.8070
## RMSE 112.5069
# In-sample accuracy of the optimised additive model.
SSE1.opt <- winter1.opt$SSE
MSE1.opt <- SSE1.opt/length(training.ts)
RMSE1.opt <- sqrt(MSE1.opt)
akurasi1.opt <- matrix(c(SSE1.opt, MSE1.opt, RMSE1.opt),
                       dimnames = list(c("SSE1.opt", "MSE1.opt", "RMSE1.opt"),
                                       "Akurasi"))
akurasi1.opt
## Akurasi
## SSE1.opt 409698.64316
## MSE1.opt 5390.77162
## RMSE1.opt 73.42187
# Side-by-side training accuracy of the fixed vs optimised additive models.
# Idiom fix: `<-` replaces `=` for top-level assignment.
akurasi1.train <- data.frame(Model_Winter = c("Winter 1", "Winter1 optimal"),
                             Nilai_SSE = c(SSE1, SSE1.opt),
                             Nilai_MSE = c(MSE1, MSE1.opt),
                             Nilai_RMSE = c(RMSE1, RMSE1.opt))
akurasi1.train
## Model_Winter Nilai_SSE Nilai_MSE Nilai_RMSE
## 1 Winter 1 961993.3 12657.807 112.50692
## 2 Winter1 optimal 409698.6 5390.772 73.42187
#Akurasi Data Testing
# BUG FIX: after data.frame() conversion, length(testing.ts) is the number
# of COLUMNS (1), not the 24 test observations, so the original MSE simply
# equalled the SSE. Divide by nrow(testing.ts) instead.
forecast1 <- data.frame(forecast1)
testing.ts <- data.frame(testing.ts)
selisih1 <- forecast1 - testing.ts
SSEtesting1 <- sum(selisih1^2)
MSEtesting1 <- SSEtesting1/nrow(testing.ts)
forecast1.opt <- data.frame(forecast1.opt)
selisih1.opt <- forecast1.opt - testing.ts
SSEtesting1.opt <- sum(selisih1.opt^2)
MSEtesting1.opt <- SSEtesting1.opt/nrow(testing.ts)
Winter Multiplikatif
# Multiplicative Holt-Winters smoothing with fixed parameters
# alpha = 0.2, beta = 0.1, gamma = 0.3 (gamma differs from the additive run).
winter2 <- HoltWinters(training.ts, alpha = 0.2, beta = 0.1, gamma = 0.3,
                       seasonal = "multiplicative")
winter2$fitted
## Time Series:
## Start = c(2, 1)
## End = c(6, 11)
## Frequency = 13
## xhat level trend season
## 2.000000 7240.913 7278.895 -11.781644 0.9963947
## 2.076923 7254.456 7245.019 -13.991049 1.0032399
## 2.153846 7147.276 7213.556 -15.738234 0.9929782
## 2.230769 7099.757 7185.741 -16.945902 0.9903696
## 2.307692 7124.770 7163.558 -17.469696 0.9970168
## 2.384615 7179.703 7143.288 -17.749665 1.0076015
## 2.461538 7134.903 7124.512 -17.852288 1.0039741
## 2.538462 7045.855 7110.722 -17.446077 0.9933147
## 2.615385 7070.343 7091.307 -17.642998 0.9995305
## 2.692308 7022.542 7090.760 -15.933371 0.9926097
## 2.769231 7162.607 7117.473 -11.668754 1.0079938
## 2.846154 7192.434 7096.839 -12.565229 1.0152676
## 2.923077 7057.148 7072.906 -13.702069 0.9997088
## 3.000000 7011.613 7074.957 -12.126769 0.9927484
## 3.076923 7077.815 7085.393 -9.870431 1.0003240
## 3.153846 7004.282 7077.718 -9.650913 0.9909756
## 3.230769 7005.172 7087.236 -7.734026 0.9895006
## 3.307692 7064.769 7095.387 -6.145519 0.9965479
## 3.384615 7161.295 7112.334 -3.836299 1.0074273
## 3.461538 7156.522 7125.452 -2.140870 1.0046624
## 3.538462 7106.152 7155.305 1.058524 0.9929837
## 3.615385 7210.415 7188.698 4.291986 1.0024224
## 3.692308 7190.110 7188.126 3.805615 0.9997467
## 3.769231 7249.412 7198.388 4.451214 1.0064659
## 3.846154 7287.047 7188.334 3.000755 1.0133094
## 3.923077 7204.185 7184.739 2.341109 1.0023799
## 4.000000 7150.572 7174.319 1.065034 0.9965421
## 4.076923 7155.725 7152.017 -1.271647 1.0006963
## 4.153846 7067.516 7113.774 -4.968854 0.9941919
## 4.230769 7041.257 7102.503 -5.599020 0.9921590
## 4.307692 7107.237 7108.607 -4.428746 1.0004306
## 4.384615 7137.453 7072.278 -7.618713 1.0103038
## 4.461538 7092.232 7032.480 -10.836671 1.0100530
## 4.538462 6956.946 6983.174 -14.683634 0.9983434
## 4.615385 6957.179 6961.399 -15.392777 1.0016085
## 4.692308 6914.005 6925.741 -17.419274 1.0008227
## 4.769231 6904.560 6895.551 -18.696378 1.0040289
## 4.846154 6925.589 6862.314 -20.150447 1.0121929
## 4.923077 6782.182 6804.471 -23.919635 1.0002404
## 5.000000 6694.804 6769.502 -25.024631 0.9926350
## 5.076923 6709.639 6769.565 -22.515821 0.9944553
## 5.153846 6715.784 6781.307 -19.090042 0.9931334
## 5.230769 6741.653 6797.133 -15.598439 0.9941190
## 5.307692 6763.272 6809.912 -12.760721 0.9950156
## 5.384615 6848.287 6825.768 -9.899046 1.0047562
## 5.461538 6855.236 6839.688 -7.517086 1.0033759
## 5.538462 6850.569 6873.699 -3.364278 0.9971230
## 5.615385 6917.520 6928.313 2.433534 0.9980916
## 5.692308 6969.162 6972.351 6.593908 0.9985983
## 5.769231 7046.063 7024.527 11.152147 1.0014759
## 5.846154 7123.955 7070.593 14.643514 1.0054648
## 5.923077 7115.945 7110.979 17.217781 0.9982812
## 6.000000 7154.002 7155.250 19.923078 0.9970495
## 6.076923 7224.140 7198.401 22.245902 1.0004838
## 6.153846 7251.321 7233.221 23.503327 0.9992553
## 6.230769 7284.400 7266.549 24.485793 0.9990901
## 6.307692 7325.186 7299.679 25.350171 1.0000215
## 6.384615 7405.921 7315.764 24.423723 1.0089552
## 6.461538 7403.021 7304.185 20.823409 1.0106502
## 6.538462 7359.052 7289.624 17.285021 1.0071361
## 6.615385 7354.221 7299.367 16.530776 1.0052383
## 6.692308 7366.013 7304.015 15.342488 1.0063743
## 6.769231 7379.353 7310.606 14.467329 1.0074101
# BUG FIX: column 2 of $fitted is the level, not the prediction; select
# the "xhat" column by name.
xhat2 <- winter2$fitted[, "xhat"]
# Multiplicative Holt-Winters with all three smoothing parameters optimised
# (NULL requests optimisation); then inspect the fitted decomposition.
winter2.opt<- HoltWinters(training.ts, alpha= NULL, beta = NULL, gamma = NULL, seasonal = "multiplicative")
winter2.opt$fitted
## Time Series:
## Start = c(2, 1)
## End = c(6, 11)
## Frequency = 13
## xhat level trend season
## 2.000000 7240.913 7278.895 -11.7816438 0.9963947
## 2.076923 7200.945 7192.536 -14.8462987 1.0032399
## 2.153846 7088.804 7154.723 -15.7900935 0.9929782
## 2.230769 7053.501 7137.921 -15.8316423 0.9903696
## 2.307692 7099.435 7135.940 -15.2624868 0.9970168
## 2.384615 7167.509 7128.382 -14.9458961 1.0076015
## 2.461538 7131.619 7118.142 -14.7524886 1.0039741
## 2.538462 7057.710 7119.309 -14.0983115 0.9933147
## 2.615385 7072.482 7090.507 -14.7025479 0.9995305
## 2.692308 7067.060 7132.067 -12.3905097 0.9926097
## 2.769231 7283.390 7233.349 -7.7192973 1.0079938
## 2.846154 7210.623 7114.476 -12.2869829 1.0152676
## 2.923077 7035.311 7051.721 -14.3608911 0.9997088
## 3.000000 7032.863 7105.280 -11.5698321 0.9914224
## 3.076923 7158.551 7155.504 -9.0305092 1.0016900
## 3.153846 7038.245 7099.461 -10.9624124 0.9929105
## 3.230769 7058.739 7129.985 -9.2576140 0.9912948
## 3.307692 7111.641 7137.769 -8.5573085 0.9975354
## 3.384615 7225.490 7175.360 -6.6609072 1.0079221
## 3.461538 7213.100 7182.902 -6.0772534 1.0050546
## 3.538462 7187.964 7246.773 -3.2028144 0.9923233
## 3.615385 7320.390 7297.130 -1.0018846 1.0033254
## 3.692308 7201.855 7205.728 -4.7167477 1.0001172
## 3.769231 7213.708 7214.867 -4.1473531 1.0004144
## 3.846154 7264.893 7185.557 -5.1813591 1.0117705
## 3.923077 7198.245 7172.858 -5.4902870 1.0043081
## 4.000000 7089.537 7128.370 -7.0928655 0.9955430
## 4.076923 7064.446 7083.713 -8.6365070 0.9984976
## 4.153846 6970.312 7011.718 -11.2401297 0.9956910
## 4.230769 6982.506 7045.144 -9.4046226 0.9924338
## 4.307692 7113.486 7115.195 -6.1394969 1.0006231
## 4.384615 7048.497 6997.185 -10.7366814 1.0088813
## 4.461538 6991.768 6937.199 -12.7605165 1.0097236
## 4.538462 6817.816 6861.712 -15.3381463 0.9958287
## 4.615385 6885.381 6916.696 -12.4483670 0.9972674
## 4.692308 6878.032 6884.149 -13.2742865 1.0010416
## 4.769231 6829.292 6852.036 -14.0484347 0.9987284
## 4.846154 6902.395 6839.523 -13.9853387 1.0112602
## 4.923077 6706.234 6713.677 -18.5820865 1.0016637
## 5.000000 6644.205 6709.036 -18.0091945 0.9930022
## 5.076923 6757.236 6810.080 -13.1169026 0.9941552
## 5.153846 6861.889 6880.312 -9.6917898 0.9987292
## 5.230769 6864.765 6889.058 -8.9341300 0.9977677
## 5.307692 6835.113 6892.261 -8.4353464 0.9929236
## 5.384615 6962.948 6931.779 -6.4647968 1.0054342
## 5.461538 6958.905 6928.673 -6.3267379 1.0052812
## 5.538462 6994.103 6992.639 -3.4381859 1.0007015
## 5.615385 7058.668 7087.373 0.5960599 0.9958661
## 5.692308 7133.460 7133.031 2.4478303 0.9997170
## 5.769231 7174.067 7178.221 4.2042539 0.9988363
## 5.846154 7242.325 7214.071 5.5047009 1.0031510
## 5.923077 7252.099 7227.011 5.8102097 1.0026654
## 6.000000 7247.666 7232.065 5.7791568 1.0013570
## 6.076923 7258.807 7252.767 6.3923921 0.9999515
## 6.153846 7285.511 7278.222 7.1757169 1.0000156
## 6.230769 7292.916 7295.454 7.5889646 0.9986134
## 6.307692 7307.371 7326.477 8.5519552 0.9962295
## 6.384615 7364.984 7315.710 7.7580767 1.0056689
## 6.461538 7306.277 7229.024 3.8770551 1.0101447
## 6.538462 7232.617 7178.060 1.6234383 1.0073727
## 6.615385 7235.009 7238.961 4.0593463 0.9988940
## 6.692308 7307.754 7283.223 5.7114224 1.0025819
## 6.769231 7311.519 7298.511 6.1049553 1.0009451
# BUG FIX: column 2 of $fitted is the level, not the prediction; select
# the "xhat" column by name.
xhat2.opt <- winter2.opt$fitted[, "xhat"]
Peramalan
#Forecast
# 24-step-ahead predictions from both multiplicative fits.
forecast2 <- predict(winter2, n.ahead = 24)
forecast2.opt <- predict(winter2.opt, n.ahead = 24)
Visualisasi
#Plot time series
# FIX: winter2 was fitted with gamma = 0.3; the original title repeated the
# additive model's parameters "0.2;0.1;0.1".
plot(training.ts, main = "Winter 0.2;0.1;0.3", type = "l", col = "black",
     xlim = c(1, 25), pch = 12)
lines(xhat2, type = "l", col = "red")
lines(xhat2.opt, type = "l", col = "blue")
lines(forecast2, type = "l", col = "red")
lines(forecast2.opt, type = "l", col = "blue")
legend("topleft", c("Actual Data", expression(paste(winter2)),
                    expression(paste(winter2.opt))), cex = 0.5,
       col = c("black", "red", "blue"), lty = 1)
Akurasi
#Akurasi data training
# In-sample accuracy of the fixed-parameter multiplicative model.
# NOTE(review): this reuses the name `akurasi1` from the additive section.
SSE2 <- winter2$SSE
MSE2 <- SSE2/length(training.ts)
RMSE2 <- sqrt(MSE2)
akurasi1 <- matrix(c(SSE2, MSE2, RMSE2),
                   dimnames = list(c("SSE2", "MSE2", "RMSE2"),
                                   "Akurasi lamda=0.2"))
akurasi1
## Akurasi lamda=0.2
## SSE2 919264.3892
## MSE2 12095.5841
## RMSE2 109.9799
# In-sample accuracy of the optimised multiplicative model.
SSE2.opt <- winter2.opt$SSE
MSE2.opt <- SSE2.opt/length(training.ts)
RMSE2.opt <- sqrt(MSE2.opt)
akurasi1.opt <- matrix(c(SSE2.opt, MSE2.opt, RMSE2.opt),
                       dimnames = list(c("SSE2.opt", "MSE2.opt", "RMSE2.opt"),
                                       "Akurasi"))
akurasi1.opt
## Akurasi
## SSE2.opt 404918.01924
## MSE2.opt 5327.86867
## RMSE2.opt 72.99225
# Side-by-side training accuracy of the fixed vs optimised multiplicative
# models. Idiom fix: `<-` replaces `=` for top-level assignment.
akurasi2.train <- data.frame(Model_Winter = c("Winter 1", "winter2 optimal"),
                             Nilai_SSE = c(SSE2, SSE2.opt),
                             Nilai_MSE = c(MSE2, MSE2.opt),
                             Nilai_RMSE = c(RMSE2, RMSE2.opt))
akurasi2.train
## Model_Winter Nilai_SSE Nilai_MSE Nilai_RMSE
## 1 Winter 1 919264.4 12095.584 109.97993
## 2 winter2 optimal 404918.0 5327.869 72.99225
#Akurasi Data Testing
# BUG FIX: length() of a one-column data frame is 1 (its column count),
# so the original MSE equalled the SSE. Divide by nrow() — the 24 test
# observations — instead.
forecast2 <- data.frame(forecast2)
testing.ts <- data.frame(testing.ts)
selisih2 <- forecast2 - testing.ts
SSEtesting2 <- sum(selisih2^2)
MSEtesting2 <- SSEtesting2/nrow(testing.ts)
forecast2.opt <- data.frame(forecast2.opt)
selisih2.opt <- forecast2.opt - testing.ts
SSEtesting2.opt <- sum(selisih2.opt^2)
MSEtesting2.opt <- SSEtesting2.opt/nrow(testing.ts)